import pandas as pd
signal_df = pd.read_csv('NN Project Data - Signal.csv')
signal_df.shape
(1599, 12)
numeric_columns = signal_df.select_dtypes(include=['int', 'float'])
missing_values = numeric_columns.isna().sum()
print("Missing Values in Numeric Columns:")
print(missing_values)
Missing Values in Numeric Columns:
Parameter 1        0
Parameter 2        0
Parameter 3        0
Parameter 4        0
Parameter 5        0
Parameter 6        0
Parameter 7        0
Parameter 8        0
Parameter 9        0
Parameter 10       0
Parameter 11       0
Signal_Strength    0
dtype: int64
signal_df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1599 entries, 0 to 1598
Data columns (total 12 columns):
 #   Column           Non-Null Count  Dtype
---  ------           --------------  -----
 0   Parameter 1      1599 non-null   float64
 1   Parameter 2      1599 non-null   float64
 2   Parameter 3      1599 non-null   float64
 3   Parameter 4      1599 non-null   float64
 4   Parameter 5      1599 non-null   float64
 5   Parameter 6      1599 non-null   float64
 6   Parameter 7      1599 non-null   float64
 7   Parameter 8      1599 non-null   float64
 8   Parameter 9      1599 non-null   float64
 9   Parameter 10     1599 non-null   float64
 10  Parameter 11     1599 non-null   float64
 11  Signal_Strength  1599 non-null   int64
dtypes: float64(11), int64(1)
memory usage: 150.0 KB
duplicates = signal_df[signal_df.duplicated()]
num_duplicates = len(duplicates)
print(f"Number of duplicate rows: {num_duplicates}")
Number of duplicate rows: 240
signal_df_cleaned = signal_df.drop_duplicates()  # Remove the 240 duplicate rows
# No missing values were found above, so this mean imputation is a safeguard only
signal_df_imputed = signal_df_cleaned.fillna(signal_df_cleaned.mean())
signal_df = signal_df_imputed
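# Optional sanity check (added sketch): confirm the deduplicated frame is clean;
# 1599 original rows minus 240 duplicates should leave 1359
assert signal_df.duplicated().sum() == 0
print(signal_df.shape)  # expected: (1359, 12)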
def replace_outliers_with_mean_or_median(df, column, method='median', threshold=1.5):
    if method == 'median':
        # Calculate the median
        median = df[column].median()
        # Calculate the IQR
        Q1 = df[column].quantile(0.25)
        Q3 = df[column].quantile(0.75)
        IQR = Q3 - Q1
        # Set the boundaries
        lower_bound = Q1 - threshold * IQR
        upper_bound = Q3 + threshold * IQR
        # Replace outliers with the median
        df[column] = df[column].apply(lambda x: median if x < lower_bound or x > upper_bound else x)
    elif method == 'mean':
        # Calculate the mean
        mean = df[column].mean()
        # Calculate the standard deviation
        std = df[column].std()
        # Set the boundaries
        lower_bound = mean - threshold * std
        upper_bound = mean + threshold * std
        # Replace outliers with the mean
        df[column] = df[column].apply(lambda x: mean if x < lower_bound or x > upper_bound else x)
    return df

# Note: this loop also treats the target column (Signal_Strength), so rare
# signal classes outside the IQR fences are replaced with the median class;
# this is why only four Signal_Strength classes remain further below
for column in signal_df.columns:
    signal_df = replace_outliers_with_mean_or_median(signal_df, column, method='median')
signal_df.shape
(1359, 12)
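# Optional check (added sketch): count values still outside the 1.5*IQR fences.
# The fences are recomputed here, so a handful of values can newly qualify as
# outliers after the replacements shifted the quantiles.
for column in signal_df.columns:
    Q1, Q3 = signal_df[column].quantile([0.25, 0.75])
    IQR = Q3 - Q1
    mask = (signal_df[column] < Q1 - 1.5 * IQR) | (signal_df[column] > Q3 + 1.5 * IQR)
    print(column, int(mask.sum()))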
import matplotlib.pyplot as plt
import seaborn as sns
target_variable = "Signal_Strength"
plt.figure(figsize=(8, 6))
sns.histplot(signal_df[target_variable], kde=True, color='blue')
plt.title(f'Distribution of {target_variable}')
plt.xlabel(target_variable)
plt.ylabel('Frequency')
plt.show()
# sns.pairplot creates its own figure; a preceding plt.figure call only
# produces an empty canvas, so it is not needed here
sns.pairplot(signal_df, diag_kind='kde')
plt.show()
Y = signal_df["Signal_Strength"]
X = signal_df.drop("Signal_Strength", axis=1)
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=42)
print("X_train shape:", X_train.shape)
print("X_test shape:", X_test.shape)
print("Y_train shape:", Y_train.shape)
print("Y_test shape:", Y_test.shape)
if X_train.shape[0] + X_test.shape[0] == X.shape[0] and Y_train.shape[0] + Y_test.shape[0] == Y.shape[0]:
    print("Train and test data are in sync.")
else:
    print("Train and test data are not in sync.")
X_train shape: (951, 11)
X_test shape: (408, 11)
Y_train shape: (951,)
Y_test shape: (408,)
Train and test data are in sync.
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
X_train_normalized = scaler.fit_transform(X_train)
X_test_normalized = scaler.transform(X_test)
signal_df['Signal_Strength'].value_counts()
5.0    577
6.0    562
7.0    167
4.0     53
Name: Signal_Strength, dtype: int64
# One-hot encode the target variable
Y_train = pd.get_dummies(Y_train)
Y_test = pd.get_dummies(Y_test)
print(Y_train.shape)
print(Y_test.shape)
(951, 4)
(408, 4)
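# Defensive step (added sketch, assuming both splits contain the same classes):
# pd.get_dummies encodes each split independently, so a class missing from one
# split would silently produce mismatched columns. Reindexing on the union of
# columns guards against that.
all_classes = sorted(set(Y_train.columns) | set(Y_test.columns))
Y_train = Y_train.reindex(columns=all_classes, fill_value=0)
Y_test = Y_test.reindex(columns=all_classes, fill_value=0)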
from tensorflow import keras
from tensorflow.keras import layers
# Create a neural network model
model = keras.Sequential()
# Add input and hidden layers
model.add(layers.Input(shape=(X_train_normalized.shape[1],)))
# Add one or more hidden layers with activation functions
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(32, activation='relu'))
num_classes = 4
model.add(layers.Dense(num_classes, activation='softmax'))
# Compile the model with appropriate loss and optimizer
model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
# Train the model using the one-hot encoded target variable
history_old = model.fit(X_train_normalized, Y_train, epochs=10, validation_data=(X_test_normalized, Y_test))
# Evaluate the model on the test data
test_loss, test_accuracy = model.evaluate(X_test_normalized, Y_test)
print(f"Test Accuracy: {test_accuracy}")
Epoch 1/10
30/30 [==============================] - 1s 10ms/step - loss: 1.2842 - accuracy: 0.3964 - val_loss: 1.1089 - val_accuracy: 0.5343
Epoch 2/10
30/30 [==============================] - 0s 4ms/step - loss: 1.0571 - accuracy: 0.5563 - val_loss: 0.9730 - val_accuracy: 0.5907
Epoch 3/10
30/30 [==============================] - 0s 5ms/step - loss: 0.9714 - accuracy: 0.5773 - val_loss: 0.9089 - val_accuracy: 0.6005
Epoch 4/10
30/30 [==============================] - 0s 4ms/step - loss: 0.9345 - accuracy: 0.5836 - val_loss: 0.8875 - val_accuracy: 0.6029
Epoch 5/10
30/30 [==============================] - 0s 5ms/step - loss: 0.9149 - accuracy: 0.5931 - val_loss: 0.8782 - val_accuracy: 0.6005
Epoch 6/10
30/30 [==============================] - 0s 5ms/step - loss: 0.9018 - accuracy: 0.5973 - val_loss: 0.8731 - val_accuracy: 0.6054
Epoch 7/10
30/30 [==============================] - 0s 5ms/step - loss: 0.8898 - accuracy: 0.6025 - val_loss: 0.8718 - val_accuracy: 0.5931
Epoch 8/10
30/30 [==============================] - 0s 5ms/step - loss: 0.8775 - accuracy: 0.6025 - val_loss: 0.8782 - val_accuracy: 0.5980
Epoch 9/10
30/30 [==============================] - 0s 5ms/step - loss: 0.8661 - accuracy: 0.6183 - val_loss: 0.8735 - val_accuracy: 0.5907
Epoch 10/10
30/30 [==============================] - 0s 5ms/step - loss: 0.8586 - accuracy: 0.6257 - val_loss: 0.8748 - val_accuracy: 0.5956
13/13 [==============================] - 0s 3ms/step - loss: 0.8748 - accuracy: 0.5956
Test Accuracy: 0.595588207244873
import matplotlib.pyplot as plt
# Extract the training and validation metrics from the model's history
training_loss = history_old.history['loss']
validation_loss = history_old.history['val_loss']
training_accuracy = history_old.history['accuracy']
validation_accuracy = history_old.history['val_accuracy']
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
ax1.set_title('Training and Validation Loss')
ax1.set_xlabel('Epoch')
ax1.set_ylabel('Loss')
ax1.plot(training_loss, label='Training Loss')
ax1.plot(validation_loss, label='Validation Loss')
ax1.legend()
ax2.set_title('Training and Validation Accuracy')
ax2.set_xlabel('Epoch')
ax2.set_ylabel('Accuracy')
ax2.plot(training_accuracy, label='Training Accuracy')
ax2.plot(validation_accuracy, label='Validation Accuracy')
ax2.legend()
plt.tight_layout()
plt.show()
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, LearningRateScheduler
# Define a learning rate schedule: keep the initial rate for the first 10
# epochs, then apply a one-time 10% reduction (the rate stays constant after that)
def lr_schedule(epoch):
    initial_lr = 0.001
    if epoch < 10:
        return initial_lr
    else:
        return initial_lr * 0.9

learning_rate_scheduler = LearningRateScheduler(lr_schedule)
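# For reference (hypothetical alternative, not used below): a per-epoch
# exponential decay instead of the one-time 10% cut above
def lr_schedule_exponential(epoch, initial_lr=0.001, decay=0.95):
    return initial_lr * (decay ** max(0, epoch - 10))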
# A deliberately minimal model: batch normalization feeding directly into the
# softmax output layer (effectively a regularized linear classifier)
model2 = keras.Sequential()
model2.add(layers.BatchNormalization())
model2.add(layers.Dense(4, activation='softmax'))
model2.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy'])
# Implement early stopping to prevent overfitting
early_stopping = EarlyStopping(monitor='val_loss', patience=8, restore_best_weights=True)
history_new = model2.fit(X_train_normalized, Y_train, epochs=150, batch_size=20, validation_data=(X_test_normalized, Y_test), callbacks=[early_stopping, learning_rate_scheduler])
test_loss, test_accuracy = model2.evaluate(X_test_normalized, Y_test)
print(f"Test Accuracy: {test_accuracy}")
Epoch 1/150
48/48 [==============================] - 1s 7ms/step - loss: 2.1058 - accuracy: 0.2061 - val_loss: 2.0478 - val_accuracy: 0.2181 - lr: 0.0010
Epoch 2/150
48/48 [==============================] - 0s 4ms/step - loss: 1.8612 - accuracy: 0.2461 - val_loss: 1.8230 - val_accuracy: 0.2475 - lr: 0.0010
[... epochs 3-94 omitted: loss and val_loss fall steadily, lr drops to 9.0000e-04 at epoch 11, best val_loss 0.8384 at epoch 88 ...]
Epoch 95/150
48/48 [==============================] - 0s 3ms/step - loss: 0.9230 - accuracy: 0.5762 - val_loss: 0.8402 - val_accuracy: 0.6250 - lr: 9.0000e-04
Epoch 96/150
48/48 [==============================] - 0s 3ms/step - loss: 0.9256 - accuracy: 0.5825 - val_loss: 0.8397 - val_accuracy: 0.6176 - lr: 9.0000e-04
13/13 [==============================] - 0s 2ms/step - loss: 0.8384 - accuracy: 0.6250
Test Accuracy: 0.625
training_loss = history_new.history['loss']
validation_loss = history_new.history['val_loss']
training_accuracy = history_new.history['accuracy']
validation_accuracy = history_new.history['val_accuracy']
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
ax1.set_title('Training and Validation Loss')
ax1.set_xlabel('Epoch')
ax1.set_ylabel('Loss')
ax1.plot(training_loss, label='Training Loss')
ax1.plot(validation_loss, label='Validation Loss')
ax1.legend()
ax2.set_title('Training and Validation Accuracy')
ax2.set_xlabel('Epoch')
ax2.set_ylabel('Accuracy')
ax2.plot(training_accuracy, label='Training Accuracy')
ax2.plot(validation_accuracy, label='Validation Accuracy')
ax2.legend()
plt.tight_layout()
plt.show()
val_loss_model1, val_acc_model1 = model.evaluate(X_test_normalized, Y_test)
val_loss_model2, val_acc_model2 = model2.evaluate(X_test_normalized, Y_test)
# Compare validation metrics
if val_acc_model1 > val_acc_model2:
    print("Model 1 has a higher validation accuracy.")
else:
    print("Model 2 has a higher validation accuracy.")
if val_loss_model1 < val_loss_model2:
    print("Model 1 has a lower validation loss.")
else:
    print("Model 2 has a lower validation loss.")
13/13 [==============================] - 0s 3ms/step - loss: 0.8748 - accuracy: 0.5956
13/13 [==============================] - 0s 3ms/step - loss: 0.8384 - accuracy: 0.6250
Model 2 has a higher validation accuracy.
Model 2 has a lower validation loss.
import h5py
file_path = 'Autonomous_Vehicles_SVHN_single_grey1.h5'
with h5py.File(file_path, 'r') as file:
    # Define a function to recursively print all keys
    def print_all_keys(name, obj):
        if isinstance(obj, h5py.Group):
            print(f"Group: {name}")
        else:
            print(f"Dataset: {name}")
    # Visit all items in the HDF5 file and call the function
    file.visititems(print_all_keys)
Dataset: X_test
Dataset: X_train
Dataset: X_val
Dataset: y_test
Dataset: y_train
Dataset: y_val
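# The same traversal can also report shapes and dtypes (added sketch):
with h5py.File(file_path, 'r') as file:
    file.visititems(lambda name, obj: print(name, obj.shape, obj.dtype)
                    if isinstance(obj, h5py.Dataset) else None)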
import h5py
import numpy as np
file_path = 'Autonomous_Vehicles_SVHN_single_grey1.h5'
with h5py.File(file_path, 'r') as file:
    X_test = np.array(file['X_test'])
    X_train = np.array(file['X_train'])
    X_val = np.array(file['X_val'])
    y_test = np.array(file['y_test'])
    y_train = np.array(file['y_train'])
    y_val = np.array(file['y_val'])
total_samples = len(X_train) + len(X_test)
train_percentage = len(X_train) / total_samples * 100
test_percentage = len(X_test) / total_samples * 100
print(f"Training set percentage: {train_percentage:.2f}%")
print(f"Testing set percentage: {test_percentage:.2f}%")
Training set percentage: 70.00%
Testing set percentage: 30.00%
print("X_train shape:", X_train.shape)
print("X_test shape:", X_test.shape)
print("y_train shape:", y_train.shape)
print("y_test shape:", y_test.shape)
# Verify that the shapes of X and y match
assert X_train.shape[0] == y_train.shape[0], "X and y shapes don't match in training set"
assert X_test.shape[0] == y_test.shape[0], "X and y shapes don't match in testing set"
X_train shape: (42000, 32, 32)
X_test shape: (18000, 32, 32)
y_train shape: (42000,)
y_test shape: (18000,)
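# Quick class-balance check (added sketch): the digit labels 0-9 should each
# appear a few thousand times in the training split
unique_labels, counts = np.unique(y_train, return_counts=True)
print(dict(zip(unique_labels.tolist(), counts.tolist())))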
# Iterate through the first 10 images and labels
for i in range(10):
    image = X_train[i]  # Get the i-th image
    label = y_train[i]  # Get the i-th label
    # Display the image
    plt.figure()
    plt.imshow(image, cmap='gray')
    plt.title(f"Label: {label}")
    plt.show()
# Normalize pixel values from 0-255 to 0-1 *before* flattening, so the
# flattened training vectors actually carry the scaled values (normalizing
# after the reshape would rebind X_train without touching x_train)
X_train = X_train / 255.0
X_test = X_test / 255.0
# Flatten each 32x32 image into a 1024-element vector
image_vector_size = 32 * 32
x_train = X_train.reshape(X_train.shape[0], image_vector_size)
x_test = X_test.reshape(X_test.shape[0], image_vector_size)
print('Training set', X_train.shape, y_train.shape)
print('Test set', X_test.shape, y_test.shape)
Training set (42000, 32, 32) (42000,)
Test set (18000, 32, 32) (18000,)
from tensorflow.keras.utils import to_categorical
# Convert the labels to "one-hot" vectors using the to_categorical function
num_classes = 10
y_train = to_categorical(y_train, num_classes)
y_test_cat = to_categorical(y_test, num_classes)
# Count classes from the still-integer test labels; np.unique on the one-hot
# encoded y_train would only see the values 0 and 1
num_classes = len(np.unique(y_test))
print("Total number of classes in the dataset:", num_classes)
Total number of classes in the dataset: 10
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, BatchNormalization, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, EarlyStopping
# Create the model
image_size = 32 * 32  # each flattened image is a 32*32 = 1024-element vector
model = Sequential()
model.add(Dense(256, activation='relu', kernel_initializer='he_uniform', input_shape=(image_size,)))
model.add(BatchNormalization())
model.add(Dense(64, activation='relu', kernel_initializer='he_uniform'))
model.add(BatchNormalization())
model.add(Dense(64, activation='relu', kernel_initializer='he_uniform'))
model.add(BatchNormalization())
model.add(Dense(32, activation='relu', kernel_initializer='he_uniform'))
model.add(Dropout(0.3))
model.add(Dense(10, activation='softmax'))
adam = Adam(learning_rate=1e-3)
model.compile(loss=CategoricalCrossentropy(), optimizer=adam, metrics=['accuracy'])
checkpoint = ModelCheckpoint("model_weights.h5", monitor='val_accuracy', save_weights_only=True, mode='max', verbose=1)
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=2, min_lr=0.00001, verbose=1)
early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True, verbose=1)
callbacks = [checkpoint, reduce_lr, early_stopping]
history = model.fit(x_train, y_train, validation_split=0.2, epochs=10, batch_size=128, verbose=2, callbacks=callbacks)
Epoch 1/10
Epoch 1: saving model to model_weights.h5
263/263 - 5s - loss: 2.0056 - accuracy: 0.3026 - val_loss: 1.8116 - val_accuracy: 0.3743 - lr: 0.0010 - 5s/epoch - 21ms/step
Epoch 2/10
Epoch 2: saving model to model_weights.h5
263/263 - 6s - loss: 1.3567 - accuracy: 0.5595 - val_loss: 1.7638 - val_accuracy: 0.4024 - lr: 0.0010 - 6s/epoch - 21ms/step
Epoch 3/10
Epoch 3: saving model to model_weights.h5
263/263 - 30s - loss: 1.1611 - accuracy: 0.6312 - val_loss: 1.4934 - val_accuracy: 0.4983 - lr: 0.0010 - 30s/epoch - 115ms/step
Epoch 4/10
Epoch 4: saving model to model_weights.h5
263/263 - 4s - loss: 1.0553 - accuracy: 0.6708 - val_loss: 1.3107 - val_accuracy: 0.5638 - lr: 0.0010 - 4s/epoch - 16ms/step
Epoch 5/10
Epoch 5: saving model to model_weights.h5
263/263 - 4s - loss: 0.9814 - accuracy: 0.6945 - val_loss: 1.4866 - val_accuracy: 0.5421 - lr: 0.0010 - 4s/epoch - 14ms/step
Epoch 6/10
Epoch 6: saving model to model_weights.h5
Epoch 6: ReduceLROnPlateau reducing learning rate to 0.00010000000474974513.
263/263 - 3s - loss: 0.9291 - accuracy: 0.7108 - val_loss: 1.6140 - val_accuracy: 0.5010 - lr: 0.0010 - 3s/epoch - 13ms/step
Epoch 7/10
Epoch 7: saving model to model_weights.h5
263/263 - 3s - loss: 0.8401 - accuracy: 0.7375 - val_loss: 0.7963 - val_accuracy: 0.7636 - lr: 1.0000e-04 - 3s/epoch - 12ms/step
Epoch 8/10
Epoch 8: saving model to model_weights.h5
263/263 - 3s - loss: 0.8174 - accuracy: 0.7493 - val_loss: 0.8169 - val_accuracy: 0.7625 - lr: 1.0000e-04 - 3s/epoch - 13ms/step
Epoch 9/10
Epoch 9: saving model to model_weights.h5
Epoch 9: ReduceLROnPlateau reducing learning rate to 1.0000000474974514e-05.
263/263 - 3s - loss: 0.8026 - accuracy: 0.7526 - val_loss: 0.8042 - val_accuracy: 0.7694 - lr: 1.0000e-04 - 3s/epoch - 11ms/step
Epoch 10/10
Epoch 10: saving model to model_weights.h5
263/263 - 3s - loss: 0.7854 - accuracy: 0.7584 - val_loss: 0.7734 - val_accuracy: 0.7820 - lr: 1.0000e-05 - 3s/epoch - 11ms/step
# Predict on the test data
y_pred = model.predict(x_test)
563/563 [==============================] - 2s 3ms/step
y_pred[0]
array([0.3143543 , 0.04093155, 0.08879722, 0.04775522, 0.04015042,
0.03665829, 0.09418209, 0.03209357, 0.15201089, 0.15306646],
dtype=float32)
# The outputs are probabilities, so recover the predicted class for each
# sample by taking the index of the maximum probability
y_pred_final = []
for i in y_pred:
    y_pred_final.append(np.argmax(i))
y_pred_final[0]
0
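# Equivalent vectorized form of the loop above (added note): argmax over
# axis=1 returns all predicted classes at once
y_pred_final = np.argmax(y_pred, axis=1)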
from sklearn.metrics import classification_report
print(classification_report(y_test, y_pred_final))
precision recall f1-score support
0 0.82 0.81 0.81 1814
1 0.75 0.84 0.79 1828
2 0.81 0.79 0.80 1803
3 0.73 0.72 0.72 1719
4 0.83 0.82 0.83 1812
5 0.74 0.78 0.76 1768
6 0.79 0.79 0.79 1832
7 0.81 0.81 0.81 1808
8 0.78 0.71 0.75 1812
9 0.75 0.74 0.74 1804
accuracy 0.78 18000
macro avg 0.78 0.78 0.78 18000
weighted avg 0.78 0.78 0.78 18000
from sklearn.metrics import confusion_matrix
import seaborn as sns
cm = confusion_matrix(y_test, y_pred_final)
plt.figure(figsize=(10, 7))
sns.heatmap(cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
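# The confusion matrix also yields per-class recall directly (added sketch):
# diagonal counts divided by each row's total
per_class_recall = cm.diagonal() / cm.sum(axis=1)
for digit, recall in enumerate(per_class_recall):
    print(f"Digit {digit}: recall {recall:.2f}")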
misclassified_images = []
for index, (label, predict) in enumerate(zip(y_test, y_pred_final)):
    if label != predict:
        misclassified_images.append(index)
# Printing all ~4,000 indices is unwieldy; show a preview and the total count
print("First 20 misclassified indices:", misclassified_images[:20])
print("Number of misclassified images:", len(misclassified_images))
First 20 misclassified indices: [0, 1, 7, 8, 11, 12, 18, 23, 25, 26, 27, 29, 40, 45, 46, 56, 58, 64, 67, 68]
Number of misclassified images: 3932
image_index = 445
plt.imshow(x_test[image_index].reshape(32, 32), cmap='Greys')
pred = model.predict(x_test[image_index].reshape(-1, 1024))
print("Was predicted ",pred.argmax())
print("Was labeled ",y_test[image_index])
print("Predicted Probabilities: ",pred)
1/1 [==============================] - 0s 24ms/step
Was predicted  3
Was labeled  3
Predicted Probabilities:  [[1.2232414e-07 4.0666713e-05 5.0872484e-05 9.9449545e-01 3.3990707e-06
  4.3174629e-03 3.8814588e-07 1.3073485e-05 2.7437246e-04 8.0425397e-04]]
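# A small follow-up (added sketch): rank the top-3 classes by predicted probability
top3 = np.argsort(pred[0])[::-1][:3]
for rank, cls in enumerate(top3, start=1):
    print(f"{rank}. class {cls} with probability {pred[0][cls]:.4f}")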
training_loss = history.history['loss']
validation_loss = history.history['val_loss']
training_accuracy = history.history['accuracy']
validation_accuracy = history.history['val_accuracy']
import matplotlib.pyplot as plt
plt.figure(figsize=(10, 5))
plt.plot(training_loss, label='Training Loss')
plt.plot(validation_loss, label='Validation Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
plt.figure(figsize=(10, 5))
plt.plot(training_accuracy, label='Training Accuracy')
plt.plot(validation_accuracy, label='Validation Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
final_training_loss = training_loss[-1]
final_validation_loss = validation_loss[-1]
final_training_accuracy = training_accuracy[-1]
final_validation_accuracy = validation_accuracy[-1]
print(f'Final Training Loss: {final_training_loss:.4f}')
print(f'Final Validation Loss: {final_validation_loss:.4f}')
print(f'Final Training Accuracy: {final_training_accuracy:.4f}')
print(f'Final Validation Accuracy: {final_validation_accuracy:.4f}')
Final Training Loss: 0.7854
Final Validation Loss: 0.7734
Final Training Accuracy: 0.7584
Final Validation Accuracy: 0.7820